%reload_ext autoreload
%autoreload 2
import pandas as pd
import numpy as np
import plotly.express as px
import plotly.graph_objects as go
from IPython.display import display
df = pd.read_csv("./data/churn_challenge.csv")
# 'customerID' should uniquely identify each record; verify that BEFORE
# promoting it to the index. BUG FIX: the original checked
# df.index.duplicated(), but at this point the index is the default
# RangeIndex, which is unique by construction — the check proved nothing.
# Check the customerID column itself (expected output: 0).
print(df['customerID'].duplicated().sum())
df = df.set_index('customerID')
0
# Check the target to predict. Per the output (No: 5174, Yes: 1869, ~73/27)
# the classes are moderately imbalanced rather than balanced — workable
# without resampling, but worth keeping in mind for metric choice.
df['Churn'].value_counts(dropna=False)
No 5174 Yes 1869 Name: Churn, dtype: int64
# Quick check of dtypes: most features load as 'object' strings.
# Note that 'TotalCharges' is object despite being a monetary amount —
# this is investigated further below.
df.dtypes
gender object SeniorCitizen int64 Partner object Dependents object tenure int64 PhoneService object MultipleLines object InternetService object OnlineSecurity object OnlineBackup object DeviceProtection object TechSupport object StreamingTV object StreamingMovies object Contract object PaperlessBilling object PaymentMethod object MonthlyCharges float64 TotalCharges object Churn object dtype: object
# Cardinality of the non-numeric (object) features.
# Everything except 'TotalCharges' is low-cardinality, so these map cleanly
# onto numeric codes; target encoding is the plan for this binary problem.
obj_cols = df.select_dtypes(include='object').columns
pd.Series({col: df[col].nunique(dropna=False) for col in obj_cols})
gender 2 Partner 2 Dependents 2 PhoneService 2 MultipleLines 3 InternetService 3 OnlineSecurity 3 OnlineBackup 3 DeviceProtection 3 TechSupport 3 StreamingTV 3 StreamingMovies 3 Contract 3 PaperlessBilling 2 PaymentMethod 4 TotalCharges 6531 Churn 2 dtype: int64
# 'TotalCharges' being object-typed is suspicious. Coerce it to numeric so
# the non-parseable entries surface as NaN, then inspect the raw strings
# behind those NaNs (the output shows they are all single spaces, ' ').
df['TotalCharges_num'] = pd.to_numeric(df['TotalCharges'], errors='coerce')
bad_mask = df['TotalCharges_num'].isna()
df.loc[bad_mask, 'TotalCharges'].values
array([' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' '],
dtype=object)
# Hypothesis: the ' ' entries in TotalCharges correspond exactly to records
# with tenure == 0 (customers with no completed billing cycle yet).
# Validate the one-to-one match, sanity-plot, then drop those few rows
# given their tiny proportion (other treatments, e.g. imputing 0, are possible).
zero_tenure = df['tenure'] == 0
display(df.loc[zero_tenure, 'TotalCharges'])
print((zero_tenure == df['TotalCharges_num'].isna()).unique())

# Sanity plot: TotalCharges / MonthlyCharges should track tenure closely;
# the y = x reference line makes the relationship easy to eyeball.
fig = go.Figure(layout=go.Layout(height=600, width=600))
fig.add_trace(go.Scatter(mode='markers',
                         x=df['tenure'],
                         y=df['TotalCharges_num'] / df['MonthlyCharges']))
fig.add_trace(go.Scatter(mode='markers+lines', x=[0, 80], y=[0, 80]))
fig.show()

df = df[~zero_tenure].drop(columns='TotalCharges_num')
df['TotalCharges'] = df['TotalCharges'].astype(float)
customerID 4472-LVYGI 3115-CZMZD 5709-LVOEQ 4367-NUYAO 1371-DWPAZ 7644-OMVMY 3213-VVOLG 2520-SGTTA 2923-ARZLG 4075-WKNIU 2775-SEFEE Name: TotalCharges, dtype: object
[ True]
# Cardinality of the numeric features after the cleanup above.
num_cols = df.select_dtypes(exclude='object').columns
pd.Series({col: df[col].nunique(dropna=False) for col in num_cols})
SeniorCitizen 2 tenure 72 MonthlyCharges 1584 TotalCharges 6530 dtype: int64
# 'SeniorCitizen' is a 0/1 int flag with cardinality 2 (see above); cast it
# to str so it is treated as a categorical feature and target encoded along
# with the other object columns.
df['SeniorCitizen'] = df['SeniorCitizen'].astype(str)
# Frequency of every unique value per categorical feature.
# The output shows adequate support for all categories in all features,
# so the proposed target encoding is viable.
freqs = []
cat_cols = df.dtypes[df.dtypes == 'O'].index
for col in cat_cols:
    freq = round(df[col].value_counts().sort_values() / len(df), 2)
    # FIX: name the Series and its index explicitly before reset_index.
    # The original rename(columns={col: 'proportion', 'index': 'value'})
    # is pandas-version-fragile: on pandas >= 2.0 value_counts() names its
    # result 'count' and its index `col`, so the rename mislabels columns.
    freq = freq.rename('proportion').rename_axis('value').reset_index()
    freq['feature'] = col
    freqs.append(freq)
freqs = pd.concat(freqs, axis=0)
display(freqs)
fig = px.bar(freqs, x='feature', y='proportion', text='value')
fig.show()
| value | proportion | feature | |
|---|---|---|---|
| 0 | Female | 0.50 | gender |
| 1 | Male | 0.50 | gender |
| 0 | 1 | 0.16 | SeniorCitizen |
| 1 | 0 | 0.84 | SeniorCitizen |
| 0 | Yes | 0.48 | Partner |
| 1 | No | 0.52 | Partner |
| 0 | Yes | 0.30 | Dependents |
| 1 | No | 0.70 | Dependents |
| 0 | No | 0.10 | PhoneService |
| 1 | Yes | 0.90 | PhoneService |
| 0 | No phone service | 0.10 | MultipleLines |
| 1 | Yes | 0.42 | MultipleLines |
| 2 | No | 0.48 | MultipleLines |
| 0 | No | 0.22 | InternetService |
| 1 | DSL | 0.34 | InternetService |
| 2 | Fiber optic | 0.44 | InternetService |
| 0 | No internet service | 0.22 | OnlineSecurity |
| 1 | Yes | 0.29 | OnlineSecurity |
| 2 | No | 0.50 | OnlineSecurity |
| 0 | No internet service | 0.22 | OnlineBackup |
| 1 | Yes | 0.34 | OnlineBackup |
| 2 | No | 0.44 | OnlineBackup |
| 0 | No internet service | 0.22 | DeviceProtection |
| 1 | Yes | 0.34 | DeviceProtection |
| 2 | No | 0.44 | DeviceProtection |
| 0 | No internet service | 0.22 | TechSupport |
| 1 | Yes | 0.29 | TechSupport |
| 2 | No | 0.49 | TechSupport |
| 0 | No internet service | 0.22 | StreamingTV |
| 1 | Yes | 0.38 | StreamingTV |
| 2 | No | 0.40 | StreamingTV |
| 0 | No internet service | 0.22 | StreamingMovies |
| 1 | Yes | 0.39 | StreamingMovies |
| 2 | No | 0.40 | StreamingMovies |
| 0 | One year | 0.21 | Contract |
| 1 | Two year | 0.24 | Contract |
| 2 | Month-to-month | 0.55 | Contract |
| 0 | No | 0.41 | PaperlessBilling |
| 1 | Yes | 0.59 | PaperlessBilling |
| 0 | Credit card (automatic) | 0.22 | PaymentMethod |
| 1 | Bank transfer (automatic) | 0.22 | PaymentMethod |
| 2 | Mailed check | 0.23 | PaymentMethod |
| 3 | Electronic check | 0.34 | PaymentMethod |
| 0 | Yes | 0.27 | Churn |
| 1 | No | 0.73 | Churn |
# Distribution of the numeric features.
# All three are spread widely enough to be modelled as continuous variables.
for col in df.select_dtypes(exclude='object').columns:
    px.histogram(data_frame=df, x=col, title=col).show()
import tensorflow as tf
from churnfli.train_test import load_train_test
from churnfli.metrics import dev_cols
data = load_train_test()

# Small feed-forward classifier: one 2-unit hidden layer and a single
# sigmoid output producing the churn probability.
clf = tf.keras.models.Sequential([
    tf.keras.layers.Dense(2, activation='sigmoid'),
    tf.keras.layers.Dense(1, activation='sigmoid')])
opt = tf.keras.optimizers.Adam(learning_rate=0.0003, beta_1=0.9,
                               beta_2=0.999, epsilon=1e-6, amsgrad=False,
                               name='Adam')
# BUG FIX: the original used loss='categorical_crossentropy', which is
# degenerate for a single-unit output — the training log printed
# loss: 0.0000e+00 on every epoch. A 1-unit sigmoid head with binary
# labels needs 'binary_crossentropy'.
clf.compile(optimizer=opt, loss='binary_crossentropy',
            metrics=[tf.keras.metrics.AUC()])
clf.fit(data['train_features'].values, data['train_labels'].values,
        batch_size=512, epochs=100, validation_split=0.2)
Epoch 1/100 9/9 [==============================] - 1s 25ms/step - loss: 0.0000e+00 - auc_12: 0.6435 - val_loss: 0.0000e+00 - val_auc_12: 0.6323 Epoch 2/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6473 - val_loss: 0.0000e+00 - val_auc_12: 0.6447 Epoch 3/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6528 - val_loss: 0.0000e+00 - val_auc_12: 0.6429 Epoch 4/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6568 - val_loss: 0.0000e+00 - val_auc_12: 0.6538 Epoch 5/100 9/9 [==============================] - 0s 6ms/step - loss: 0.0000e+00 - auc_12: 0.6630 - val_loss: 0.0000e+00 - val_auc_12: 0.6568 Epoch 6/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6681 - val_loss: 0.0000e+00 - val_auc_12: 0.6652 Epoch 7/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6730 - val_loss: 0.0000e+00 - val_auc_12: 0.6707 Epoch 8/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6793 - val_loss: 0.0000e+00 - val_auc_12: 0.6763 Epoch 9/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6837 - val_loss: 0.0000e+00 - val_auc_12: 0.6833 Epoch 10/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6914 - val_loss: 0.0000e+00 - val_auc_12: 0.6888 Epoch 11/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.6971 - val_loss: 0.0000e+00 - val_auc_12: 0.6959 Epoch 12/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7009 - val_loss: 0.0000e+00 - val_auc_12: 0.7012 Epoch 13/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7094 - val_loss: 0.0000e+00 - val_auc_12: 0.7084 Epoch 14/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7131 - val_loss: 0.0000e+00 - 
val_auc_12: 0.7119 Epoch 15/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7159 - val_loss: 0.0000e+00 - val_auc_12: 0.7204 Epoch 16/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7245 - val_loss: 0.0000e+00 - val_auc_12: 0.7249 Epoch 17/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7256 - val_loss: 0.0000e+00 - val_auc_12: 0.7314 Epoch 18/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7326 - val_loss: 0.0000e+00 - val_auc_12: 0.7368 Epoch 19/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7396 - val_loss: 0.0000e+00 - val_auc_12: 0.7377 Epoch 20/100 9/9 [==============================] - 0s 5ms/step - loss: 0.0000e+00 - auc_12: 0.7393 - val_loss: 0.0000e+00 - val_auc_12: 0.7423 Epoch 21/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7449 - val_loss: 0.0000e+00 - val_auc_12: 0.7477 Epoch 22/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7472 - val_loss: 0.0000e+00 - val_auc_12: 0.7522 Epoch 23/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7510 - val_loss: 0.0000e+00 - val_auc_12: 0.7529 Epoch 24/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7552 - val_loss: 0.0000e+00 - val_auc_12: 0.7567 Epoch 25/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7571 - val_loss: 0.0000e+00 - val_auc_12: 0.7606 Epoch 26/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7608 - val_loss: 0.0000e+00 - val_auc_12: 0.7635 Epoch 27/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7635 - val_loss: 0.0000e+00 - val_auc_12: 0.7659 Epoch 28/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7624 - 
val_loss: 0.0000e+00 - val_auc_12: 0.7678 Epoch 29/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7678 - val_loss: 0.0000e+00 - val_auc_12: 0.7729 Epoch 30/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7637 - val_loss: 0.0000e+00 - val_auc_12: 0.7751 Epoch 31/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7701 - val_loss: 0.0000e+00 - val_auc_12: 0.7740 Epoch 32/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7691 - val_loss: 0.0000e+00 - val_auc_12: 0.7719 Epoch 33/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7684 - val_loss: 0.0000e+00 - val_auc_12: 0.7775 Epoch 34/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7709 - val_loss: 0.0000e+00 - val_auc_12: 0.7748 Epoch 35/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7678 - val_loss: 0.0000e+00 - val_auc_12: 0.7804 Epoch 36/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7745 - val_loss: 0.0000e+00 - val_auc_12: 0.7808 Epoch 37/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7670 - val_loss: 0.0000e+00 - val_auc_12: 0.7792 Epoch 38/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7699 - val_loss: 0.0000e+00 - val_auc_12: 0.7779 Epoch 39/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7704 - val_loss: 0.0000e+00 - val_auc_12: 0.7808 Epoch 40/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7710 - val_loss: 0.0000e+00 - val_auc_12: 0.7820 Epoch 41/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7714 - val_loss: 0.0000e+00 - val_auc_12: 0.7804 Epoch 42/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - 
auc_12: 0.7708 - val_loss: 0.0000e+00 - val_auc_12: 0.7803 Epoch 43/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7724 - val_loss: 0.0000e+00 - val_auc_12: 0.7769 Epoch 44/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7705 - val_loss: 0.0000e+00 - val_auc_12: 0.7806 Epoch 45/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7696 - val_loss: 0.0000e+00 - val_auc_12: 0.7801 Epoch 46/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7731 - val_loss: 0.0000e+00 - val_auc_12: 0.7799 Epoch 47/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7684 - val_loss: 0.0000e+00 - val_auc_12: 0.7830 Epoch 48/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7696 - val_loss: 0.0000e+00 - val_auc_12: 0.7785 Epoch 49/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7678 - val_loss: 0.0000e+00 - val_auc_12: 0.7821 Epoch 50/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7687 - val_loss: 0.0000e+00 - val_auc_12: 0.7829 Epoch 51/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7676 - val_loss: 0.0000e+00 - val_auc_12: 0.7781 Epoch 52/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7664 - val_loss: 0.0000e+00 - val_auc_12: 0.7834 Epoch 53/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7660 - val_loss: 0.0000e+00 - val_auc_12: 0.7804 Epoch 54/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7661 - val_loss: 0.0000e+00 - val_auc_12: 0.7769 Epoch 55/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7668 - val_loss: 0.0000e+00 - val_auc_12: 0.7801 Epoch 56/100 9/9 [==============================] - 0s 4ms/step - 
loss: 0.0000e+00 - auc_12: 0.7666 - val_loss: 0.0000e+00 - val_auc_12: 0.7782 Epoch 57/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7661 - val_loss: 0.0000e+00 - val_auc_12: 0.7801 Epoch 58/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7644 - val_loss: 0.0000e+00 - val_auc_12: 0.7799 Epoch 59/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7633 - val_loss: 0.0000e+00 - val_auc_12: 0.7770 Epoch 60/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7655 - val_loss: 0.0000e+00 - val_auc_12: 0.7812 Epoch 61/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7646 - val_loss: 0.0000e+00 - val_auc_12: 0.7787 Epoch 62/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7642 - val_loss: 0.0000e+00 - val_auc_12: 0.7752 Epoch 63/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7653 - val_loss: 0.0000e+00 - val_auc_12: 0.7784 Epoch 64/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7639 - val_loss: 0.0000e+00 - val_auc_12: 0.7768 Epoch 65/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7604 - val_loss: 0.0000e+00 - val_auc_12: 0.7756 Epoch 66/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7626 - val_loss: 0.0000e+00 - val_auc_12: 0.7783 Epoch 67/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7622 - val_loss: 0.0000e+00 - val_auc_12: 0.7738 Epoch 68/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7617 - val_loss: 0.0000e+00 - val_auc_12: 0.7764 Epoch 69/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7619 - val_loss: 0.0000e+00 - val_auc_12: 0.7745 Epoch 70/100 9/9 [==============================] 
- 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7590 - val_loss: 0.0000e+00 - val_auc_12: 0.7721 Epoch 71/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7602 - val_loss: 0.0000e+00 - val_auc_12: 0.7754 Epoch 72/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7618 - val_loss: 0.0000e+00 - val_auc_12: 0.7760 Epoch 73/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7586 - val_loss: 0.0000e+00 - val_auc_12: 0.7701 Epoch 74/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7590 - val_loss: 0.0000e+00 - val_auc_12: 0.7768 Epoch 75/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7597 - val_loss: 0.0000e+00 - val_auc_12: 0.7750 Epoch 76/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7577 - val_loss: 0.0000e+00 - val_auc_12: 0.7694 Epoch 77/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7580 - val_loss: 0.0000e+00 - val_auc_12: 0.7743 Epoch 78/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7608 - val_loss: 0.0000e+00 - val_auc_12: 0.7757 Epoch 79/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7588 - val_loss: 0.0000e+00 - val_auc_12: 0.7703 Epoch 80/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7573 - val_loss: 0.0000e+00 - val_auc_12: 0.7736 Epoch 81/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7571 - val_loss: 0.0000e+00 - val_auc_12: 0.7734 Epoch 82/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7526 - val_loss: 0.0000e+00 - val_auc_12: 0.7709 Epoch 83/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7543 - val_loss: 0.0000e+00 - val_auc_12: 0.7696 Epoch 84/100 9/9 
[==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7573 - val_loss: 0.0000e+00 - val_auc_12: 0.7703 Epoch 85/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7510 - val_loss: 0.0000e+00 - val_auc_12: 0.7700 Epoch 86/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7535 - val_loss: 0.0000e+00 - val_auc_12: 0.7700 Epoch 87/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7573 - val_loss: 0.0000e+00 - val_auc_12: 0.7709 Epoch 88/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7537 - val_loss: 0.0000e+00 - val_auc_12: 0.7702 Epoch 89/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7539 - val_loss: 0.0000e+00 - val_auc_12: 0.7672 Epoch 90/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7559 - val_loss: 0.0000e+00 - val_auc_12: 0.7710 Epoch 91/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7533 - val_loss: 0.0000e+00 - val_auc_12: 0.7683 Epoch 92/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7532 - val_loss: 0.0000e+00 - val_auc_12: 0.7664 Epoch 93/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7523 - val_loss: 0.0000e+00 - val_auc_12: 0.7684 Epoch 94/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7530 - val_loss: 0.0000e+00 - val_auc_12: 0.7678 Epoch 95/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7492 - val_loss: 0.0000e+00 - val_auc_12: 0.7634 Epoch 96/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7525 - val_loss: 0.0000e+00 - val_auc_12: 0.7682 Epoch 97/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7527 - val_loss: 0.0000e+00 - val_auc_12: 0.7662 
Epoch 98/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7497 - val_loss: 0.0000e+00 - val_auc_12: 0.7619 Epoch 99/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7487 - val_loss: 0.0000e+00 - val_auc_12: 0.7669 Epoch 100/100 9/9 [==============================] - 0s 4ms/step - loss: 0.0000e+00 - auc_12: 0.7509 - val_loss: 0.0000e+00 - val_auc_12: 0.7660
<tensorflow.python.keras.callbacks.History at 0x7f8e206fab90>
# df_test = data['test_labels'].to_frame()
# df_test['DNN_prob'] = clf.predict(data['test_features'])
# NOTE(review): this imports from 'mebank_fli' while the live code above
# imports from 'churnfli' — confirm which package actually provides
# compute_gini before re-enabling this cell.
# from mebank_fli.metrics import compute_gini
# compute_gini(df_test['churn'], df_test['DNN_prob'])